from keras.datasets import mnist
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
from tensorflow.keras.utils import to_categorical
# Download the MNIST handwritten-digit dataset (fetched from the Keras data
# mirror on first call, cached locally afterwards) and split it into the
# canonical train/test partitions: 60k/10k images of shape (28, 28).
(X_train, y_train), (X_test, y_test) = mnist.load_data()
print(X_train.shape)
print(X_test.shape)
Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/mnist.npz 11493376/11490434 [==============================] - 0s 0us/step 11501568/11490434 [==============================] - 0s 0us/step (60000, 28, 28) (10000, 28, 28)
# Preview the first nine training digits in a 3x3 grid of grayscale images.
# NOTE(review): the loop body lost its indentation in the paste — restored
# here so the cell is valid Python again.
for i in range(9):
    # subplot spec 33x = 3 rows, 3 columns, cell index i+1 (1-based)
    plt.subplot(330 + 1 + i)
    plt.imshow(X_train[i], cmap=plt.get_cmap('gray'))
plt.show()
# Flatten each 28x28 image into a 784-dimensional row vector and rescale
# pixel intensities from [0, 255] to [0, 1]; labels are cast to plain ints.
X_train = X_train.astype(float).reshape(-1, 784) / 255
y_train = y_train.astype(int)
X_test = X_test.astype(float).reshape(-1, 784) / 255
y_test = y_test.astype(int)
print(X_train.shape)
print(y_train.shape)
(60000, 784) (60000,)
import tensorflow as tf
from tensorflow import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Flatten
from keras.optimizers import Adam
from tensorflow.keras import models
# Baseline experiment: squeeze all 784 inputs through a SINGLE ReLU hidden
# unit before the 10-way softmax — deliberately under-capacitated.
keras.backend.clear_session()
model1 = Sequential([
    Dense(1, activation='relu', input_shape=[784]),
    Dense(10, activation='softmax'),
])
model1.summary()
# Integer labels (not one-hot), hence the sparse categorical loss.
model1.compile(optimizer='rmsprop',
               loss='sparse_categorical_crossentropy',
               metrics=['accuracy'])
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense (Dense) (None, 1) 785 _________________________________________________________________ dense_1 (Dense) (None, 10) 20 ================================================================= Total params: 805 Trainable params: 805 Non-trainable params: 0 _________________________________________________________________
from keras.utils.vis_utils import plot_model
# Render the architecture diagram of model1. The original passed
# `model_PCA`, a name never defined in this notebook (NameError);
# the model built above is `model1`.
plot_model(model1, show_shapes=True, show_layer_names=True)
# Train for 150 epochs with mini-batches of 32, evaluating on the test
# split after every epoch; the per-epoch metrics are kept in `history1`.
history1=model1.fit(X_train, y_train, batch_size=32, validation_data=(X_test, y_test), epochs=150)
Epoch 1/150 1875/1875 [==============================] - 15s 1ms/step - loss: 2.0551 - accuracy: 0.2119 - val_loss: 1.8202 - val_accuracy: 0.2649 Epoch 2/150 1875/1875 [==============================] - 2s 999us/step - loss: 1.7944 - accuracy: 0.2698 - val_loss: 1.7310 - val_accuracy: 0.2880 Epoch 3/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.7230 - accuracy: 0.2886 - val_loss: 1.6961 - val_accuracy: 0.3264 Epoch 4/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6945 - accuracy: 0.3262 - val_loss: 1.6786 - val_accuracy: 0.3321 Epoch 5/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6788 - accuracy: 0.3370 - val_loss: 1.6684 - val_accuracy: 0.3407 Epoch 6/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6687 - accuracy: 0.3419 - val_loss: 1.6576 - val_accuracy: 0.3412 Epoch 7/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6673 - accuracy: 0.3419 - val_loss: 1.6521 - val_accuracy: 0.3448 Epoch 8/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6553 - accuracy: 0.3480 - val_loss: 1.6461 - val_accuracy: 0.3498 Epoch 9/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6527 - accuracy: 0.3505 - val_loss: 1.6499 - val_accuracy: 0.3454 Epoch 10/150 1875/1875 [==============================] - 2s 996us/step - loss: 1.6418 - accuracy: 0.3538 - val_loss: 1.6419 - val_accuracy: 0.3560 Epoch 11/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6501 - accuracy: 0.3562 - val_loss: 1.6401 - val_accuracy: 0.3546 Epoch 12/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6419 - accuracy: 0.3603 - val_loss: 1.6369 - val_accuracy: 0.3614 Epoch 13/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6435 - accuracy: 0.3567 - val_loss: 1.6345 - val_accuracy: 0.3647 Epoch 14/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6330 - accuracy: 0.3641 - 
val_loss: 1.6333 - val_accuracy: 0.3663 Epoch 15/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6369 - accuracy: 0.3662 - val_loss: 1.6336 - val_accuracy: 0.3598 Epoch 16/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6430 - accuracy: 0.3629 - val_loss: 1.6305 - val_accuracy: 0.3697 Epoch 17/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6282 - accuracy: 0.3703 - val_loss: 1.6248 - val_accuracy: 0.3719 Epoch 18/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6305 - accuracy: 0.3716 - val_loss: 1.6220 - val_accuracy: 0.3653 Epoch 19/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6229 - accuracy: 0.3678 - val_loss: 1.6191 - val_accuracy: 0.3600 Epoch 20/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6242 - accuracy: 0.3677 - val_loss: 1.6096 - val_accuracy: 0.3710 Epoch 21/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6248 - accuracy: 0.3687 - val_loss: 1.6052 - val_accuracy: 0.3754 Epoch 22/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6241 - accuracy: 0.3765 - val_loss: 1.6045 - val_accuracy: 0.3663 Epoch 23/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6127 - accuracy: 0.3870 - val_loss: 1.5996 - val_accuracy: 0.3756 Epoch 24/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6148 - accuracy: 0.3844 - val_loss: 1.5933 - val_accuracy: 0.3808 Epoch 25/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6101 - accuracy: 0.3868 - val_loss: 1.5877 - val_accuracy: 0.3869 Epoch 26/150 1875/1875 [==============================] - 2s 999us/step - loss: 1.5984 - accuracy: 0.3871 - val_loss: 1.5881 - val_accuracy: 0.3639 Epoch 27/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.6082 - accuracy: 0.3777 - val_loss: 1.5836 - val_accuracy: 0.3899 Epoch 28/150 1875/1875 [==============================] - 
2s 1ms/step - loss: 1.5985 - accuracy: 0.3951 - val_loss: 1.5776 - val_accuracy: 0.3779 Epoch 29/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5927 - accuracy: 0.3924 - val_loss: 1.5802 - val_accuracy: 0.4072 Epoch 30/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5939 - accuracy: 0.3992 - val_loss: 1.5736 - val_accuracy: 0.4061 Epoch 31/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5894 - accuracy: 0.3961 - val_loss: 1.5693 - val_accuracy: 0.4010 Epoch 32/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5845 - accuracy: 0.3969 - val_loss: 1.5695 - val_accuracy: 0.3886 Epoch 33/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5807 - accuracy: 0.3994 - val_loss: 1.5679 - val_accuracy: 0.4030 Epoch 34/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5807 - accuracy: 0.3922 - val_loss: 1.5625 - val_accuracy: 0.3907 Epoch 35/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5885 - accuracy: 0.3859 - val_loss: 1.5627 - val_accuracy: 0.3844 Epoch 36/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5780 - accuracy: 0.3956 - val_loss: 1.5605 - val_accuracy: 0.3944 Epoch 37/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5695 - accuracy: 0.3940 - val_loss: 1.5584 - val_accuracy: 0.3839 Epoch 38/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5763 - accuracy: 0.3837 - val_loss: 1.5556 - val_accuracy: 0.3947 Epoch 39/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5706 - accuracy: 0.3876 - val_loss: 1.5544 - val_accuracy: 0.3947 Epoch 40/150 1875/1875 [==============================] - 2s 990us/step - loss: 1.5685 - accuracy: 0.3886 - val_loss: 1.5533 - val_accuracy: 0.3914 Epoch 41/150 1875/1875 [==============================] - 2s 982us/step - loss: 1.5649 - accuracy: 0.3877 - val_loss: 1.5537 - val_accuracy: 0.3858 Epoch 
42/150 1875/1875 [==============================] - 2s 990us/step - loss: 1.5657 - accuracy: 0.3831 - val_loss: 1.5567 - val_accuracy: 0.3856 Epoch 43/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5671 - accuracy: 0.3858 - val_loss: 1.5510 - val_accuracy: 0.3903 Epoch 44/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5707 - accuracy: 0.3808 - val_loss: 1.5574 - val_accuracy: 0.3779 Epoch 45/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5577 - accuracy: 0.3870 - val_loss: 1.5534 - val_accuracy: 0.3873 Epoch 46/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5665 - accuracy: 0.3802 - val_loss: 1.5476 - val_accuracy: 0.3771 Epoch 47/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5611 - accuracy: 0.3794 - val_loss: 1.5466 - val_accuracy: 0.3875 Epoch 48/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5620 - accuracy: 0.3806 - val_loss: 1.5494 - val_accuracy: 0.3903 Epoch 49/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5679 - accuracy: 0.3769 - val_loss: 1.5497 - val_accuracy: 0.3797 Epoch 50/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5634 - accuracy: 0.3763 - val_loss: 1.5487 - val_accuracy: 0.3892 Epoch 51/150 1875/1875 [==============================] - 2s 993us/step - loss: 1.5680 - accuracy: 0.3779 - val_loss: 1.5444 - val_accuracy: 0.3791 Epoch 52/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5503 - accuracy: 0.3765 - val_loss: 1.5449 - val_accuracy: 0.3831 Epoch 53/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5650 - accuracy: 0.3734 - val_loss: 1.5433 - val_accuracy: 0.3771 Epoch 54/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5620 - accuracy: 0.3753 - val_loss: 1.5429 - val_accuracy: 0.3779 Epoch 55/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5551 - accuracy: 0.3811 
- val_loss: 1.5401 - val_accuracy: 0.3859 Epoch 56/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5518 - accuracy: 0.3786 - val_loss: 1.5398 - val_accuracy: 0.3848 Epoch 57/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5554 - accuracy: 0.3779 - val_loss: 1.5373 - val_accuracy: 0.3828 Epoch 58/150 1875/1875 [==============================] - 2s 991us/step - loss: 1.5560 - accuracy: 0.3817 - val_loss: 1.5400 - val_accuracy: 0.3827 Epoch 59/150 1875/1875 [==============================] - 2s 972us/step - loss: 1.5524 - accuracy: 0.3819 - val_loss: 1.5373 - val_accuracy: 0.3873 Epoch 60/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5508 - accuracy: 0.3828 - val_loss: 1.5361 - val_accuracy: 0.3925 Epoch 61/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5564 - accuracy: 0.3887 - val_loss: 1.5341 - val_accuracy: 0.3877 Epoch 62/150 1875/1875 [==============================] - 2s 986us/step - loss: 1.5466 - accuracy: 0.3883 - val_loss: 1.5386 - val_accuracy: 0.3856 Epoch 63/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5530 - accuracy: 0.3897 - val_loss: 1.5324 - val_accuracy: 0.3955 Epoch 64/150 1875/1875 [==============================] - 2s 999us/step - loss: 1.5490 - accuracy: 0.3914 - val_loss: 1.5318 - val_accuracy: 0.3922 Epoch 65/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5425 - accuracy: 0.3931 - val_loss: 1.5309 - val_accuracy: 0.3952 Epoch 66/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5480 - accuracy: 0.3946 - val_loss: 1.5349 - val_accuracy: 0.3862 Epoch 67/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5451 - accuracy: 0.3946 - val_loss: 1.5284 - val_accuracy: 0.3939 Epoch 68/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5365 - accuracy: 0.3959 - val_loss: 1.5323 - val_accuracy: 0.3958 Epoch 69/150 1875/1875 
[==============================] - 2s 1ms/step - loss: 1.5305 - accuracy: 0.3979 - val_loss: 1.5287 - val_accuracy: 0.3958 Epoch 70/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5378 - accuracy: 0.3989 - val_loss: 1.5277 - val_accuracy: 0.3958 Epoch 71/150 1875/1875 [==============================] - 2s 994us/step - loss: 1.5376 - accuracy: 0.4019 - val_loss: 1.5304 - val_accuracy: 0.3961 Epoch 72/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5408 - accuracy: 0.3971 - val_loss: 1.5313 - val_accuracy: 0.4016 Epoch 73/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5335 - accuracy: 0.4028 - val_loss: 1.5279 - val_accuracy: 0.4041 Epoch 74/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5452 - accuracy: 0.3965 - val_loss: 1.5259 - val_accuracy: 0.4001 Epoch 75/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5327 - accuracy: 0.3992 - val_loss: 1.5241 - val_accuracy: 0.4002 Epoch 76/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5397 - accuracy: 0.4002 - val_loss: 1.5383 - val_accuracy: 0.4034 Epoch 77/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5422 - accuracy: 0.3987 - val_loss: 1.5239 - val_accuracy: 0.3960 Epoch 78/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5461 - accuracy: 0.4003 - val_loss: 1.5226 - val_accuracy: 0.4095 Epoch 79/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5350 - accuracy: 0.4054 - val_loss: 1.5238 - val_accuracy: 0.3913 Epoch 80/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5423 - accuracy: 0.3988 - val_loss: 1.5237 - val_accuracy: 0.4076 Epoch 81/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5458 - accuracy: 0.3990 - val_loss: 1.5211 - val_accuracy: 0.4076 Epoch 82/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5301 - accuracy: 0.4021 - val_loss: 1.5238 
- val_accuracy: 0.4042 Epoch 83/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5329 - accuracy: 0.4027 - val_loss: 1.5233 - val_accuracy: 0.4003 Epoch 84/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5311 - accuracy: 0.3987 - val_loss: 1.5247 - val_accuracy: 0.3947 Epoch 85/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5307 - accuracy: 0.4031 - val_loss: 1.5233 - val_accuracy: 0.4010 Epoch 86/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5273 - accuracy: 0.4019 - val_loss: 1.5250 - val_accuracy: 0.3970 Epoch 87/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5378 - accuracy: 0.4030 - val_loss: 1.5249 - val_accuracy: 0.3949 Epoch 88/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5399 - accuracy: 0.4013 - val_loss: 1.5260 - val_accuracy: 0.4007 Epoch 89/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5401 - accuracy: 0.3967 - val_loss: 1.5296 - val_accuracy: 0.3920 Epoch 90/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5347 - accuracy: 0.4026 - val_loss: 1.5306 - val_accuracy: 0.3898 Epoch 91/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5369 - accuracy: 0.4008 - val_loss: 1.5259 - val_accuracy: 0.4049 Epoch 92/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5379 - accuracy: 0.3988 - val_loss: 1.5274 - val_accuracy: 0.3987 Epoch 93/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5383 - accuracy: 0.3970 - val_loss: 1.5228 - val_accuracy: 0.4041 Epoch 94/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5473 - accuracy: 0.3971 - val_loss: 1.5219 - val_accuracy: 0.4017 Epoch 95/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5337 - accuracy: 0.4023 - val_loss: 1.5310 - val_accuracy: 0.4078 Epoch 96/150 1875/1875 [==============================] - 2s 1ms/step - loss: 
1.5241 - accuracy: 0.4053 - val_loss: 1.5319 - val_accuracy: 0.3977 Epoch 97/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5382 - accuracy: 0.3988 - val_loss: 1.5243 - val_accuracy: 0.3951 Epoch 98/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5313 - accuracy: 0.3992 - val_loss: 1.5307 - val_accuracy: 0.3924 Epoch 99/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5378 - accuracy: 0.3969 - val_loss: 1.5289 - val_accuracy: 0.3923 Epoch 100/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5261 - accuracy: 0.4030 - val_loss: 1.5265 - val_accuracy: 0.3932 Epoch 101/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5342 - accuracy: 0.4020 - val_loss: 1.5215 - val_accuracy: 0.4030 Epoch 102/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5315 - accuracy: 0.4023 - val_loss: 1.5236 - val_accuracy: 0.4001 Epoch 103/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5367 - accuracy: 0.4001 - val_loss: 1.5223 - val_accuracy: 0.3957 Epoch 104/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5395 - accuracy: 0.3981 - val_loss: 1.5260 - val_accuracy: 0.3965 Epoch 105/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5391 - accuracy: 0.4018 - val_loss: 1.5238 - val_accuracy: 0.3944 Epoch 106/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5411 - accuracy: 0.4011 - val_loss: 1.5301 - val_accuracy: 0.4004 Epoch 107/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5393 - accuracy: 0.3990 - val_loss: 1.5238 - val_accuracy: 0.4050 Epoch 108/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5344 - accuracy: 0.3967 - val_loss: 1.5213 - val_accuracy: 0.4002 Epoch 109/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5367 - accuracy: 0.3990 - val_loss: 1.5248 - val_accuracy: 0.3993 Epoch 110/150 1875/1875 
[==============================] - 2s 1ms/step - loss: 1.5362 - accuracy: 0.4000 - val_loss: 1.5351 - val_accuracy: 0.3951 Epoch 111/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5308 - accuracy: 0.4003 - val_loss: 1.5218 - val_accuracy: 0.3948 Epoch 112/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5393 - accuracy: 0.3987 - val_loss: 1.5275 - val_accuracy: 0.4032 Epoch 113/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5344 - accuracy: 0.3953 - val_loss: 1.5307 - val_accuracy: 0.3990 Epoch 114/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5344 - accuracy: 0.3999 - val_loss: 1.5223 - val_accuracy: 0.3993 Epoch 115/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5349 - accuracy: 0.4022 - val_loss: 1.5252 - val_accuracy: 0.3986 Epoch 116/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5422 - accuracy: 0.3950 - val_loss: 1.5256 - val_accuracy: 0.3906 Epoch 117/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5267 - accuracy: 0.4014 - val_loss: 1.5220 - val_accuracy: 0.3959 Epoch 118/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5346 - accuracy: 0.3971 - val_loss: 1.5263 - val_accuracy: 0.3984 Epoch 119/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5322 - accuracy: 0.3981 - val_loss: 1.5210 - val_accuracy: 0.3997 Epoch 120/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5324 - accuracy: 0.3994 - val_loss: 1.5250 - val_accuracy: 0.3975 Epoch 121/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5367 - accuracy: 0.3995 - val_loss: 1.5340 - val_accuracy: 0.3951 Epoch 122/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5297 - accuracy: 0.3980 - val_loss: 1.5467 - val_accuracy: 0.3967 Epoch 123/150 1875/1875 [==============================] - 2s 998us/step - loss: 1.5239 - accuracy: 0.4047 - 
val_loss: 1.5238 - val_accuracy: 0.3985 Epoch 124/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5272 - accuracy: 0.3988 - val_loss: 1.5221 - val_accuracy: 0.4055 Epoch 125/150 1875/1875 [==============================] - 2s 999us/step - loss: 1.5401 - accuracy: 0.3975 - val_loss: 1.5266 - val_accuracy: 0.3992 Epoch 126/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5413 - accuracy: 0.3944 - val_loss: 1.5238 - val_accuracy: 0.3978 Epoch 127/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5397 - accuracy: 0.4004 - val_loss: 1.5199 - val_accuracy: 0.3989 Epoch 128/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5333 - accuracy: 0.3962 - val_loss: 1.5348 - val_accuracy: 0.4045 Epoch 129/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5376 - accuracy: 0.3984 - val_loss: 1.5290 - val_accuracy: 0.3947 Epoch 130/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5390 - accuracy: 0.3989 - val_loss: 1.5252 - val_accuracy: 0.3848 Epoch 131/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5377 - accuracy: 0.3977 - val_loss: 1.5212 - val_accuracy: 0.3989 Epoch 132/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5239 - accuracy: 0.4032 - val_loss: 1.5290 - val_accuracy: 0.3898 Epoch 133/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5300 - accuracy: 0.3983 - val_loss: 1.5274 - val_accuracy: 0.4011 Epoch 134/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5350 - accuracy: 0.3975 - val_loss: 1.5255 - val_accuracy: 0.4010 Epoch 135/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5390 - accuracy: 0.3984 - val_loss: 1.5250 - val_accuracy: 0.3874 Epoch 136/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5296 - accuracy: 0.3977 - val_loss: 1.5251 - val_accuracy: 0.3940 Epoch 137/150 1875/1875 
[==============================] - 2s 1ms/step - loss: 1.5344 - accuracy: 0.3965 - val_loss: 1.5245 - val_accuracy: 0.3976 Epoch 138/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5211 - accuracy: 0.3990 - val_loss: 1.5259 - val_accuracy: 0.3959 Epoch 139/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5389 - accuracy: 0.3983 - val_loss: 1.5236 - val_accuracy: 0.3954 Epoch 140/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5288 - accuracy: 0.3957 - val_loss: 1.5252 - val_accuracy: 0.3961 Epoch 141/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5330 - accuracy: 0.3989 - val_loss: 1.5427 - val_accuracy: 0.3964 Epoch 142/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5348 - accuracy: 0.3955 - val_loss: 1.5233 - val_accuracy: 0.3992 Epoch 143/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5302 - accuracy: 0.3965 - val_loss: 1.5241 - val_accuracy: 0.3976 Epoch 144/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5341 - accuracy: 0.3981 - val_loss: 1.5264 - val_accuracy: 0.3944 Epoch 145/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5303 - accuracy: 0.3998 - val_loss: 1.5278 - val_accuracy: 0.3971 Epoch 146/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5364 - accuracy: 0.3986 - val_loss: 1.5279 - val_accuracy: 0.3920 Epoch 147/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5387 - accuracy: 0.3959 - val_loss: 1.5269 - val_accuracy: 0.3924 Epoch 148/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5330 - accuracy: 0.3971 - val_loss: 1.5375 - val_accuracy: 0.3820 Epoch 149/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5281 - accuracy: 0.3953 - val_loss: 1.5244 - val_accuracy: 0.3939 Epoch 150/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.5358 - accuracy: 0.4025 - 
val_loss: 1.5213 - val_accuracy: 0.3967
from matplotlib.pyplot import figure
# Plot every metric recorded by fit() — train/validation loss and
# accuracy — as one labelled curve per history column.
plt.figure(figsize=(15, 15))
history1_df = pd.DataFrame(history1.history)
plt.plot(history1_df, linewidth=5)
plt.legend(list(history1_df.columns))
plt.show()
# `Sequential.predict_classes()` is deprecated (see the runtime warning
# this cell produced) and removed in modern Keras; taking the argmax over
# the softmax outputs yields the identical predicted labels.
y_pred = np.argmax(model1.predict(X_test), axis=-1)
plt.figure(figsize=(15, 15))
# Show the first 16 test digits annotated with the model's prediction.
for i in range(16):
    plt.subplot(4, 4, i + 1)
    plt.imshow(X_test[i].reshape(28, 28) * 255, cmap=plt.get_cmap('gray'))
    plt.xlabel('pred={}'.format(y_pred[i]), fontsize=15)
plt.show()
/usr/local/lib/python3.7/dist-packages/keras/engine/sequential.py:450: UserWarning: `model.predict_classes()` is deprecated and will be removed after 2021-01-01. Please use instead:* `np.argmax(model.predict(x), axis=-1)`, if your model does multi-class classification (e.g. if it uses a `softmax` last-layer activation).* `(model.predict(x) > 0.5).astype("int32")`, if your model does binary classification (e.g. if it uses a `sigmoid` last-layer activation).
warnings.warn('`model.predict_classes()` is deprecated and '
# Build an inspection model that shares model1's weights but returns the
# output of EVERY layer, so intermediate activations can be examined.
layer_outputs= [layer.output for layer in model1.layers]
activation_model=models.Model(inputs=model1.input, outputs=layer_outputs)
print(f"There are {len(layer_outputs)} layers")
There are 2 layers
# Run the full training set through the inspection model; `activations`
# is a list with one array per layer (hidden layer first, softmax second).
activations = activation_model.predict(X_train)
hidden_layer_activation = activations[0]
output_layer_activations = activations[1]
# Shape printed below is (60000, 1): one ReLU activation per example.
print(hidden_layer_activation.shape)
print(f"The maximum activation value of the hidden nodes in the hidden layer is {hidden_layer_activation.max()}")
(60000, 1) The maximum activation value of the hidden nodes in the hidden layer is 16.233015060424805
# Deprecated `predict_classes()` replaced with an explicit argmax over the
# softmax probabilities — same labels via the supported API.
y_train_pred = np.argmax(model1.predict(X_train), axis=-1)
# Per-example table: true label, the single hidden node's activation, the
# ten output-node probabilities, and the model's predicted label.
activation_data = {'actual_class': y_train}
activation_data["Node1"] = hidden_layer_activation[:, 0]
activation_df = pd.DataFrame(activation_data)
for i in range(0, 10):
    activation_df['Output_Node_{}'.format(i)] = output_layer_activations[:, i]
activation_df['model1_prediction'] = y_train_pred
# Recompute predictions with the supported argmax form instead of the
# deprecated predict_classes(). (Redundant with the cell above, but kept
# so this cell stands alone, as in the original notebook.)
y_train_pred = np.argmax(model1.predict(X_train), axis=-1)
plt.figure(figsize=(15, 15))
# First 16 TRAINING digits with predictions, then the matching slice of
# the activation table transposed so examples run across the columns.
for i in range(0, 16):
    plt.subplot(4, 4, i + 1)
    plt.imshow(X_train[i].reshape(28, 28) * 255, cmap=plt.get_cmap('gray'))
    plt.xlabel('pred={}'.format(y_train_pred[i]), fontsize=15)
display(activation_df.round(3).T.iloc[:, 0:16])
plt.show()
/usr/local/lib/python3.7/dist-packages/keras/engine/sequential.py:450: UserWarning: `model.predict_classes()` is deprecated and will be removed after 2021-01-01. Please use instead:* `np.argmax(model.predict(x), axis=-1)`, if your model does multi-class classification (e.g. if it uses a `softmax` last-layer activation).* `(model.predict(x) > 0.5).astype("int32")`, if your model does binary classification (e.g. if it uses a `sigmoid` last-layer activation).
warnings.warn('`model.predict_classes()` is deprecated and '
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| actual_class | 5.000 | 0.000 | 4.000 | 1.000 | 9.000 | 2.000 | 1.000 | 3.000 | 1.000 | 4.000 | 3.000 | 5.000 | 3.000 | 6.000 | 1.000 | 7.000 |
| Node1 | 4.700 | 7.441 | 2.100 | 0.529 | 1.919 | 4.140 | 0.000 | 4.574 | 0.000 | 2.707 | 2.737 | 3.628 | 4.544 | 6.615 | 0.000 | 1.375 |
| Output_Node_0 | 0.024 | 0.350 | 0.000 | 0.000 | 0.000 | 0.010 | 0.000 | 0.020 | 0.000 | 0.001 | 0.001 | 0.004 | 0.019 | 0.212 | 0.000 | 0.000 |
| Output_Node_1 | 0.000 | 0.000 | 0.019 | 0.754 | 0.036 | 0.000 | 0.933 | 0.000 | 0.933 | 0.002 | 0.002 | 0.000 | 0.000 | 0.000 | 0.933 | 0.189 |
| Output_Node_2 | 0.239 | 0.109 | 0.024 | 0.000 | 0.017 | 0.201 | 0.000 | 0.232 | 0.000 | 0.064 | 0.066 | 0.154 | 0.231 | 0.187 | 0.000 | 0.004 |
| Output_Node_3 | 0.195 | 0.022 | 0.072 | 0.001 | 0.055 | 0.217 | 0.000 | 0.201 | 0.000 | 0.141 | 0.144 | 0.214 | 0.203 | 0.058 | 0.000 | 0.019 |
| Output_Node_4 | 0.061 | 0.001 | 0.228 | 0.014 | 0.205 | 0.112 | 0.002 | 0.070 | 0.002 | 0.260 | 0.259 | 0.174 | 0.073 | 0.003 | 0.002 | 0.115 |
| Output_Node_5 | 0.197 | 0.086 | 0.020 | 0.000 | 0.014 | 0.167 | 0.000 | 0.192 | 0.000 | 0.054 | 0.056 | 0.128 | 0.190 | 0.150 | 0.000 | 0.004 |
| Output_Node_6 | 0.074 | 0.409 | 0.001 | 0.000 | 0.000 | 0.037 | 0.000 | 0.064 | 0.000 | 0.003 | 0.003 | 0.018 | 0.062 | 0.330 | 0.000 | 0.000 |
| Output_Node_7 | 0.002 | 0.000 | 0.270 | 0.168 | 0.315 | 0.007 | 0.052 | 0.002 | 0.052 | 0.126 | 0.120 | 0.022 | 0.002 | 0.000 | 0.052 | 0.393 |
| Output_Node_8 | 0.198 | 0.023 | 0.073 | 0.001 | 0.056 | 0.220 | 0.000 | 0.205 | 0.000 | 0.143 | 0.147 | 0.218 | 0.206 | 0.059 | 0.000 | 0.019 |
| Output_Node_9 | 0.010 | 0.000 | 0.292 | 0.061 | 0.301 | 0.029 | 0.013 | 0.013 | 0.013 | 0.208 | 0.203 | 0.068 | 0.014 | 0.000 | 0.013 | 0.257 |
| model1_prediction | 2.000 | 6.000 | 9.000 | 1.000 | 7.000 | 8.000 | 1.000 | 2.000 | 1.000 | 4.000 | 4.000 | 8.000 | 2.000 | 6.000 | 1.000 | 7.000 |
# Per-class accuracy, plus the most frequent wrong prediction made for
# each true digit class.
activation_df['accurate'] = np.where(
    activation_df['actual_class'] == activation_df['model1_prediction'], 1, 0)
misclassified = activation_df[
    activation_df['actual_class'] != activation_df['model1_prediction']]
# Mode of the wrong predictions per true class (value_counts sorts by
# frequency, so index[0] is the most common value).
class_error = (misclassified
               .groupby(['actual_class'], as_index=False)[['model1_prediction']]
               .agg(lambda x: x.value_counts().index[0]))
class_error = class_error.rename(
    columns={'model1_prediction': 'most_common_error'})
class_accuracy = activation_df.groupby(
    ['actual_class'], as_index=False)[['accurate']].mean()
class_accuracy = pd.merge(class_accuracy, class_error)
class_accuracy
| actual_class | accurate | most_common_error | |
|---|---|---|---|
| 0 | 0 | 0.559851 | 6 |
| 1 | 1 | 0.901365 | 7 |
| 2 | 2 | 0.400638 | 8 |
| 3 | 3 | 0.000000 | 8 |
| 4 | 4 | 0.443341 | 9 |
| 5 | 5 | 0.000000 | 2 |
| 6 | 6 | 0.429199 | 0 |
| 7 | 7 | 0.530886 | 1 |
| 8 | 8 | 0.286959 | 2 |
| 9 | 9 | 0.294335 | 7 |
import seaborn as sns
# Boxplot of the single hidden node's activation grouped by true digit
# class — visualizes how (poorly) one unit separates the ten classes.
sns.set(rc={'figure.figsize':(15,10)})
sns.boxplot(y='Node1', x='actual_class', data=activation_df[['Node1','actual_class']], width=0.5, palette="colorblind")
<matplotlib.axes._subplots.AxesSubplot at 0x7f14902a9f50>
# Second experiment: widen the bottleneck to TWO ReLU hidden units,
# keeping everything else identical to model1.
keras.backend.clear_session()
model2 = Sequential([
    Dense(2, activation='relu', input_shape=[784]),
    Dense(10, activation='softmax'),
])
model2.summary()
# Integer labels (not one-hot), hence the sparse categorical loss.
model2.compile(optimizer='rmsprop',
               loss='sparse_categorical_crossentropy',
               metrics=['accuracy'])
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense (Dense) (None, 2) 1570 _________________________________________________________________ dense_1 (Dense) (None, 10) 30 ================================================================= Total params: 1,600 Trainable params: 1,600 Non-trainable params: 0 _________________________________________________________________
# Train model2 under the same regime as model1 (150 epochs, batch 32,
# validated on the test split each epoch) for a like-for-like comparison.
history2=model2.fit(X_train, y_train, batch_size=32, validation_data=(X_test, y_test), epochs=150)
Epoch 1/150 1875/1875 [==============================] - 3s 1ms/step - loss: 1.8721 - accuracy: 0.2999 - val_loss: 1.4106 - val_accuracy: 0.5006 Epoch 2/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.3676 - accuracy: 0.5171 - val_loss: 1.2600 - val_accuracy: 0.5561 Epoch 3/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.2417 - accuracy: 0.5596 - val_loss: 1.1762 - val_accuracy: 0.5823 Epoch 4/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.1681 - accuracy: 0.5896 - val_loss: 1.1519 - val_accuracy: 0.5932 Epoch 5/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.1549 - accuracy: 0.5962 - val_loss: 1.1366 - val_accuracy: 0.5997 Epoch 6/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.1455 - accuracy: 0.6000 - val_loss: 1.1304 - val_accuracy: 0.6064 Epoch 7/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.1380 - accuracy: 0.6084 - val_loss: 1.1231 - val_accuracy: 0.6165 Epoch 8/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.1253 - accuracy: 0.6132 - val_loss: 1.1178 - val_accuracy: 0.6105 Epoch 9/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.1073 - accuracy: 0.6153 - val_loss: 1.1111 - val_accuracy: 0.6186 Epoch 10/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.1067 - accuracy: 0.6199 - val_loss: 1.1105 - val_accuracy: 0.6289 Epoch 11/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.1000 - accuracy: 0.6244 - val_loss: 1.1057 - val_accuracy: 0.6256 Epoch 12/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0918 - accuracy: 0.6233 - val_loss: 1.1131 - val_accuracy: 0.6284 Epoch 13/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0783 - accuracy: 0.6284 - val_loss: 1.0936 - val_accuracy: 0.6335 Epoch 14/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0815 - accuracy: 0.6322 - 
val_loss: 1.0851 - val_accuracy: 0.6370 Epoch 15/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0694 - accuracy: 0.6373 - val_loss: 1.0812 - val_accuracy: 0.6427 Epoch 16/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0662 - accuracy: 0.6427 - val_loss: 1.0771 - val_accuracy: 0.6512 Epoch 17/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0638 - accuracy: 0.6493 - val_loss: 1.0792 - val_accuracy: 0.6549 Epoch 18/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0597 - accuracy: 0.6463 - val_loss: 1.0718 - val_accuracy: 0.6570 Epoch 19/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0655 - accuracy: 0.6517 - val_loss: 1.0703 - val_accuracy: 0.6497 Epoch 20/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0561 - accuracy: 0.6554 - val_loss: 1.0706 - val_accuracy: 0.6588 Epoch 21/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0508 - accuracy: 0.6559 - val_loss: 1.0647 - val_accuracy: 0.6593 Epoch 22/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0469 - accuracy: 0.6601 - val_loss: 1.0708 - val_accuracy: 0.6603 Epoch 23/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0424 - accuracy: 0.6583 - val_loss: 1.0689 - val_accuracy: 0.6601 Epoch 24/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0461 - accuracy: 0.6624 - val_loss: 1.0637 - val_accuracy: 0.6619 Epoch 25/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0466 - accuracy: 0.6610 - val_loss: 1.0700 - val_accuracy: 0.6529 Epoch 26/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0405 - accuracy: 0.6595 - val_loss: 1.0669 - val_accuracy: 0.6593 Epoch 27/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0336 - accuracy: 0.6643 - val_loss: 1.0643 - val_accuracy: 0.6651 Epoch 28/150 1875/1875 [==============================] - 2s 
1ms/step - loss: 1.0401 - accuracy: 0.6645 - val_loss: 1.0650 - val_accuracy: 0.6653 Epoch 29/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0218 - accuracy: 0.6699 - val_loss: 1.0652 - val_accuracy: 0.6629 Epoch 30/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0347 - accuracy: 0.6659 - val_loss: 1.0682 - val_accuracy: 0.6630 Epoch 31/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0327 - accuracy: 0.6709 - val_loss: 1.0615 - val_accuracy: 0.6705 Epoch 32/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0281 - accuracy: 0.6720 - val_loss: 1.0665 - val_accuracy: 0.6646 Epoch 33/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0225 - accuracy: 0.6733 - val_loss: 1.0590 - val_accuracy: 0.6733 Epoch 34/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0179 - accuracy: 0.6786 - val_loss: 1.0559 - val_accuracy: 0.6720 Epoch 35/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0133 - accuracy: 0.6763 - val_loss: 1.0582 - val_accuracy: 0.6731 Epoch 36/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0108 - accuracy: 0.6794 - val_loss: 1.0591 - val_accuracy: 0.6695 Epoch 37/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0099 - accuracy: 0.6746 - val_loss: 1.0531 - val_accuracy: 0.6743 Epoch 38/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0127 - accuracy: 0.6768 - val_loss: 1.0572 - val_accuracy: 0.6746 Epoch 39/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0193 - accuracy: 0.6768 - val_loss: 1.0558 - val_accuracy: 0.6755 Epoch 40/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0102 - accuracy: 0.6811 - val_loss: 1.0570 - val_accuracy: 0.6751 Epoch 41/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0102 - accuracy: 0.6798 - val_loss: 1.0623 - val_accuracy: 0.6729 Epoch 42/150 
1875/1875 [==============================] - 2s 1ms/step - loss: 1.0070 - accuracy: 0.6794 - val_loss: 1.0545 - val_accuracy: 0.6761 Epoch 43/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0121 - accuracy: 0.6803 - val_loss: 1.0556 - val_accuracy: 0.6786 Epoch 44/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0090 - accuracy: 0.6835 - val_loss: 1.0579 - val_accuracy: 0.6791 Epoch 45/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0063 - accuracy: 0.6828 - val_loss: 1.0654 - val_accuracy: 0.6683 Epoch 46/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0145 - accuracy: 0.6825 - val_loss: 1.0578 - val_accuracy: 0.6827 Epoch 47/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0073 - accuracy: 0.6836 - val_loss: 1.0585 - val_accuracy: 0.6723 Epoch 48/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0119 - accuracy: 0.6794 - val_loss: 1.0555 - val_accuracy: 0.6760 Epoch 49/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0048 - accuracy: 0.6862 - val_loss: 1.0610 - val_accuracy: 0.6747 Epoch 50/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0091 - accuracy: 0.6862 - val_loss: 1.0563 - val_accuracy: 0.6817 Epoch 51/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0069 - accuracy: 0.6824 - val_loss: 1.0641 - val_accuracy: 0.6766 Epoch 52/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0056 - accuracy: 0.6857 - val_loss: 1.0716 - val_accuracy: 0.6702 Epoch 53/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0012 - accuracy: 0.6877 - val_loss: 1.0572 - val_accuracy: 0.6796 Epoch 54/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0067 - accuracy: 0.6824 - val_loss: 1.0577 - val_accuracy: 0.6785 Epoch 55/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0051 - accuracy: 0.6851 - 
val_loss: 1.0631 - val_accuracy: 0.6757 Epoch 56/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0083 - accuracy: 0.6831 - val_loss: 1.0688 - val_accuracy: 0.6693 Epoch 57/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0113 - accuracy: 0.6820 - val_loss: 1.0579 - val_accuracy: 0.6793 Epoch 58/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0154 - accuracy: 0.6856 - val_loss: 1.0653 - val_accuracy: 0.6741 Epoch 59/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0298 - accuracy: 0.6799 - val_loss: 1.0616 - val_accuracy: 0.6806 Epoch 60/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0079 - accuracy: 0.6828 - val_loss: 1.0656 - val_accuracy: 0.6788 Epoch 61/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0075 - accuracy: 0.6821 - val_loss: 1.0594 - val_accuracy: 0.6800 Epoch 62/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0025 - accuracy: 0.6852 - val_loss: 1.0585 - val_accuracy: 0.6828 Epoch 63/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0011 - accuracy: 0.6874 - val_loss: 1.0660 - val_accuracy: 0.6802 Epoch 64/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0031 - accuracy: 0.6867 - val_loss: 1.0604 - val_accuracy: 0.6773 Epoch 65/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0022 - accuracy: 0.6849 - val_loss: 1.0648 - val_accuracy: 0.6782 Epoch 66/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0126 - accuracy: 0.6831 - val_loss: 1.0680 - val_accuracy: 0.6747 Epoch 67/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0041 - accuracy: 0.6860 - val_loss: 1.0669 - val_accuracy: 0.6797 Epoch 68/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0217 - accuracy: 0.6839 - val_loss: 1.0665 - val_accuracy: 0.6769 Epoch 69/150 1875/1875 [==============================] - 2s 
1ms/step - loss: 1.0105 - accuracy: 0.6816 - val_loss: 1.0644 - val_accuracy: 0.6809 Epoch 70/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0034 - accuracy: 0.6847 - val_loss: 1.0679 - val_accuracy: 0.6784 Epoch 71/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0017 - accuracy: 0.6855 - val_loss: 1.0633 - val_accuracy: 0.6792 Epoch 72/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0116 - accuracy: 0.6846 - val_loss: 1.0679 - val_accuracy: 0.6779 Epoch 73/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0092 - accuracy: 0.6844 - val_loss: 1.0704 - val_accuracy: 0.6744 Epoch 74/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0349 - accuracy: 0.6812 - val_loss: 1.0663 - val_accuracy: 0.6817 Epoch 75/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0232 - accuracy: 0.6836 - val_loss: 1.0637 - val_accuracy: 0.6798 Epoch 76/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0109 - accuracy: 0.6852 - val_loss: 1.0664 - val_accuracy: 0.6795 Epoch 77/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0042 - accuracy: 0.6882 - val_loss: 1.0648 - val_accuracy: 0.6828 Epoch 78/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0093 - accuracy: 0.6888 - val_loss: 1.0667 - val_accuracy: 0.6836 Epoch 79/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0182 - accuracy: 0.6841 - val_loss: 1.0655 - val_accuracy: 0.6849 Epoch 80/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0135 - accuracy: 0.6886 - val_loss: 1.0704 - val_accuracy: 0.6804 Epoch 81/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0189 - accuracy: 0.6851 - val_loss: 1.0700 - val_accuracy: 0.6774 Epoch 82/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0089 - accuracy: 0.6849 - val_loss: 1.0731 - val_accuracy: 0.6836 Epoch 83/150 
1875/1875 [==============================] - 2s 1ms/step - loss: 0.9992 - accuracy: 0.6864 - val_loss: 1.0729 - val_accuracy: 0.6758 Epoch 84/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0222 - accuracy: 0.6844 - val_loss: 1.0704 - val_accuracy: 0.6839 Epoch 85/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0011 - accuracy: 0.6884 - val_loss: 1.0803 - val_accuracy: 0.6735 Epoch 86/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0140 - accuracy: 0.6864 - val_loss: 1.0738 - val_accuracy: 0.6763 Epoch 87/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0167 - accuracy: 0.6897 - val_loss: 1.0755 - val_accuracy: 0.6738 Epoch 88/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0258 - accuracy: 0.6835 - val_loss: 1.0722 - val_accuracy: 0.6800 Epoch 89/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0074 - accuracy: 0.6882 - val_loss: 1.0690 - val_accuracy: 0.6776 Epoch 90/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0384 - accuracy: 0.6819 - val_loss: 1.0792 - val_accuracy: 0.6718 Epoch 91/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0138 - accuracy: 0.6860 - val_loss: 1.0824 - val_accuracy: 0.6733 Epoch 92/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0052 - accuracy: 0.6856 - val_loss: 1.0698 - val_accuracy: 0.6831 Epoch 93/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0138 - accuracy: 0.6863 - val_loss: 1.0874 - val_accuracy: 0.6701 Epoch 94/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0198 - accuracy: 0.6883 - val_loss: 1.0697 - val_accuracy: 0.6826 Epoch 95/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0151 - accuracy: 0.6859 - val_loss: 1.0730 - val_accuracy: 0.6810 Epoch 96/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0160 - accuracy: 0.6888 - 
val_loss: 1.0725 - val_accuracy: 0.6766 Epoch 97/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0138 - accuracy: 0.6832 - val_loss: 1.0721 - val_accuracy: 0.6798 Epoch 98/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0145 - accuracy: 0.6858 - val_loss: 1.0724 - val_accuracy: 0.6799 Epoch 99/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0234 - accuracy: 0.6829 - val_loss: 1.0782 - val_accuracy: 0.6742 Epoch 100/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0265 - accuracy: 0.6863 - val_loss: 1.0744 - val_accuracy: 0.6807 Epoch 101/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0122 - accuracy: 0.6866 - val_loss: 1.0803 - val_accuracy: 0.6750 Epoch 102/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0158 - accuracy: 0.6886 - val_loss: 1.0741 - val_accuracy: 0.6809 Epoch 103/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0274 - accuracy: 0.6818 - val_loss: 1.0733 - val_accuracy: 0.6846 Epoch 104/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0122 - accuracy: 0.6849 - val_loss: 1.0738 - val_accuracy: 0.6799 Epoch 105/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0358 - accuracy: 0.6842 - val_loss: 1.0747 - val_accuracy: 0.6773 Epoch 106/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0314 - accuracy: 0.6799 - val_loss: 1.0726 - val_accuracy: 0.6803 Epoch 107/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0178 - accuracy: 0.6833 - val_loss: 1.0720 - val_accuracy: 0.6810 Epoch 108/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0267 - accuracy: 0.6856 - val_loss: 1.0785 - val_accuracy: 0.6790 Epoch 109/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0299 - accuracy: 0.6816 - val_loss: 1.0765 - val_accuracy: 0.6804 Epoch 110/150 1875/1875 
[==============================] - 2s 1ms/step - loss: 1.0152 - accuracy: 0.6872 - val_loss: 1.0745 - val_accuracy: 0.6757 Epoch 111/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0325 - accuracy: 0.6831 - val_loss: 1.0761 - val_accuracy: 0.6813 Epoch 112/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0062 - accuracy: 0.6864 - val_loss: 1.0693 - val_accuracy: 0.6826 Epoch 113/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0093 - accuracy: 0.6874 - val_loss: 1.0727 - val_accuracy: 0.6806 Epoch 114/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0249 - accuracy: 0.6852 - val_loss: 1.0757 - val_accuracy: 0.6828 Epoch 115/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0298 - accuracy: 0.6834 - val_loss: 1.0748 - val_accuracy: 0.6755 Epoch 116/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0046 - accuracy: 0.6873 - val_loss: 1.0740 - val_accuracy: 0.6813 Epoch 117/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0212 - accuracy: 0.6851 - val_loss: 1.0768 - val_accuracy: 0.6821 Epoch 118/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0175 - accuracy: 0.6897 - val_loss: 1.0774 - val_accuracy: 0.6798 Epoch 119/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0272 - accuracy: 0.6829 - val_loss: 1.0766 - val_accuracy: 0.6766 Epoch 120/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0255 - accuracy: 0.6856 - val_loss: 1.0773 - val_accuracy: 0.6779 Epoch 121/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0163 - accuracy: 0.6869 - val_loss: 1.0734 - val_accuracy: 0.6840 Epoch 122/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0191 - accuracy: 0.6865 - val_loss: 1.0749 - val_accuracy: 0.6838 Epoch 123/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0268 - accuracy: 0.6836 - 
val_loss: 1.0948 - val_accuracy: 0.6692 Epoch 124/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0172 - accuracy: 0.6863 - val_loss: 1.0733 - val_accuracy: 0.6837 Epoch 125/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0221 - accuracy: 0.6875 - val_loss: 1.0837 - val_accuracy: 0.6700 Epoch 126/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0298 - accuracy: 0.6868 - val_loss: 1.0728 - val_accuracy: 0.6828 Epoch 127/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0247 - accuracy: 0.6834 - val_loss: 1.0781 - val_accuracy: 0.6832 Epoch 128/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0131 - accuracy: 0.6872 - val_loss: 1.0742 - val_accuracy: 0.6866 Epoch 129/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0118 - accuracy: 0.6877 - val_loss: 1.0746 - val_accuracy: 0.6808 Epoch 130/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0305 - accuracy: 0.6859 - val_loss: 1.0777 - val_accuracy: 0.6819 Epoch 131/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0225 - accuracy: 0.6878 - val_loss: 1.0749 - val_accuracy: 0.6791 Epoch 132/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0118 - accuracy: 0.6856 - val_loss: 1.0759 - val_accuracy: 0.6819 Epoch 133/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0176 - accuracy: 0.6877 - val_loss: 1.0827 - val_accuracy: 0.6740 Epoch 134/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0241 - accuracy: 0.6843 - val_loss: 1.0826 - val_accuracy: 0.6772 Epoch 135/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0087 - accuracy: 0.6864 - val_loss: 1.0826 - val_accuracy: 0.6808 Epoch 136/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0238 - accuracy: 0.6877 - val_loss: 1.0739 - val_accuracy: 0.6799 Epoch 137/150 1875/1875 
[==============================] - 2s 1ms/step - loss: 1.0315 - accuracy: 0.6835 - val_loss: 1.0790 - val_accuracy: 0.6823 Epoch 138/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0077 - accuracy: 0.6874 - val_loss: 1.0776 - val_accuracy: 0.6805 Epoch 139/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0014 - accuracy: 0.6886 - val_loss: 1.0775 - val_accuracy: 0.6790 Epoch 140/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0201 - accuracy: 0.6840 - val_loss: 1.0802 - val_accuracy: 0.6807 Epoch 141/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0237 - accuracy: 0.6834 - val_loss: 1.0862 - val_accuracy: 0.6752 Epoch 142/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0144 - accuracy: 0.6847 - val_loss: 1.0760 - val_accuracy: 0.6813 Epoch 143/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0162 - accuracy: 0.6847 - val_loss: 1.0771 - val_accuracy: 0.6826 Epoch 144/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0112 - accuracy: 0.6891 - val_loss: 1.0760 - val_accuracy: 0.6851 Epoch 145/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0162 - accuracy: 0.6874 - val_loss: 1.0782 - val_accuracy: 0.6825 Epoch 146/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0232 - accuracy: 0.6879 - val_loss: 1.0790 - val_accuracy: 0.6809 Epoch 147/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0409 - accuracy: 0.6851 - val_loss: 1.0785 - val_accuracy: 0.6806 Epoch 148/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0017 - accuracy: 0.6855 - val_loss: 1.0828 - val_accuracy: 0.6773 Epoch 149/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0261 - accuracy: 0.6838 - val_loss: 1.0776 - val_accuracy: 0.6808 Epoch 150/150 1875/1875 [==============================] - 2s 1ms/step - loss: 1.0232 - accuracy: 0.6886 - 
val_loss: 1.0779 - val_accuracy: 0.6822
from matplotlib.pyplot import figure
# Plot every curve recorded during training (train/validation loss and
# accuracy) on a single large set of axes.
history2_df = pd.DataFrame(history2.history)
plt.figure(figsize=(15, 15))
plt.plot(history2_df, linewidth=5)
plt.legend(list(history2_df.columns))
plt.show()
# Predict labels for the test set and show the first 16 digits with their
# predicted class underneath each image.
# Fix: `model.predict_classes()` is deprecated (see the warning pasted below);
# take the argmax over the softmax outputs instead, as the warning instructs.
y_pred2 = np.argmax(model2.predict(X_test), axis=-1)
plt.figure(figsize=(15, 15))
for i in range(16):
    plt.subplot(4, 4, i + 1)
    # Images were flattened to 784 and scaled to [0, 1] earlier; undo both
    # for display.
    plt.imshow(X_test[i].reshape(28, 28) * 255, cmap=plt.get_cmap('gray'))
    plt.xlabel('pred={}'.format(y_pred2[i]), fontsize=15)
plt.show()
/usr/local/lib/python3.7/dist-packages/keras/engine/sequential.py:450: UserWarning: `model.predict_classes()` is deprecated and will be removed after 2021-01-01. Please use instead:* `np.argmax(model.predict(x), axis=-1)`, if your model does multi-class classification (e.g. if it uses a `softmax` last-layer activation).* `(model.predict(x) > 0.5).astype("int32")`, if your model does binary classification (e.g. if it uses a `sigmoid` last-layer activation).
warnings.warn('`model.predict_classes()` is deprecated and '
# Build a probe model that exposes the output of every layer of model2 so we
# can inspect intermediate activations.
layer_outputs = []
for layer in model2.layers:
    layer_outputs.append(layer.output)
activation_model2 = models.Model(inputs=model2.input, outputs=layer_outputs)
print(f"There are {len(layer_outputs)} layers")
There are 2 layers
# Run the whole training set through the probe model; the result is one array
# per layer: hidden-layer activations first, softmax outputs second.
activations2 = activation_model2.predict(X_train)
hidden_layer_activation2, output_layer_activations2 = activations2[0], activations2[1]
print(hidden_layer_activation2.shape)
print(f"The maximum activation value of the hidden nodes in the hidden layer is {hidden_layer_activation2.max()}")
(60000, 2) The maximum activation value of the hidden nodes in the hidden layer is 252.01443481445312
# NOTE(review): `y_train_pred` is never defined anywhere in this file's visible
# code — presumably it was created in an earlier notebook cell that has since
# been removed (the pasted output `(60000,)` shows it existed in the session).
# Either restore its definition or use `y_train_pred2`, defined just below.
y_train_pred.shape
(60000,)
# Predict labels for the training set.
# Fix: `model.predict_classes()` is deprecated (see the warning pasted below);
# take the argmax over the softmax outputs instead, as the warning instructs.
y_train_pred2 = np.argmax(model2.predict(X_train), axis=-1)
# Assemble a per-sample frame: true label, the two hidden-node activations,
# all ten softmax output values, and the predicted label.
activation_data2 = {'actual_class': y_train}
activation_data2["Node1"] = hidden_layer_activation2[:, 0]
activation_data2["Node2"] = hidden_layer_activation2[:, 1]
activation_df2 = pd.DataFrame(activation_data2)
for i in range(0, 10):
    activation_df2['Output_Node_{}'.format(i)] = output_layer_activations2[:, i]
activation_df2['model2_prediction'] = y_train_pred2
# Show the first 16 training digits with model2's predicted label under each,
# alongside a transposed view of those samples' activation values.
plt.figure(figsize=(15,15))
for i in range(0,16):
plt.subplot(4,4, i+1)
plt.imshow(X_train[i].reshape(28,28)*255, cmap=plt.get_cmap('gray'))
plt.xlabel('pred={}'.format(y_train_pred2[i]), fontsize=15)
# NOTE(review): the notebook's indentation was lost in this transcript, so it
# is unclear whether `display(...)` ran inside the loop or after it; the single
# table in the pasted output below suggests it ran once, after the loop.
# `display` is an IPython builtin — this cell only runs in a notebook.
display(activation_df2.round(3).T.iloc[:,0:16])
plt.show()
/usr/local/lib/python3.7/dist-packages/keras/engine/sequential.py:450: UserWarning: `model.predict_classes()` is deprecated and will be removed after 2021-01-01. Please use instead:* `np.argmax(model.predict(x), axis=-1)`, if your model does multi-class classification (e.g. if it uses a `softmax` last-layer activation).* `(model.predict(x) > 0.5).astype("int32")`, if your model does binary classification (e.g. if it uses a `sigmoid` last-layer activation).
warnings.warn('`model.predict_classes()` is deprecated and '
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| actual_class | 5.000 | 0.000 | 4.000 | 1.000 | 9.000 | 2.000 | 1.000 | 3.000 | 1.000 | 4.000 | 3.000 | 5.000 | 3.000000 | 6.000 | 1.000 | 7.000 |
| Node1 | 11.013 | 1.413 | 1.300 | 9.181 | 0.000 | 14.097 | 9.978 | 15.340 | 7.220 | 0.000 | 13.787 | 13.039 | 23.481001 | 79.473 | 9.256 | 0.000 |
| Node2 | 5.882 | 8.389 | 4.604 | 2.025 | 3.061 | 8.151 | 0.722 | 8.071 | 0.856 | 5.482 | 6.221 | 7.439 | 10.914000 | 0.000 | 0.836 | 0.410 |
| Output_Node_0 | 0.001 | 0.727 | 0.013 | 0.000 | 0.000 | 0.003 | 0.000 | 0.001 | 0.000 | 0.042 | 0.000 | 0.002 | 0.000000 | 0.000 | 0.000 | 0.000 |
| Output_Node_1 | 0.004 | 0.000 | 0.000 | 0.760 | 0.002 | 0.000 | 0.957 | 0.000 | 0.914 | 0.000 | 0.002 | 0.000 | 0.000000 | 0.000 | 0.950 | 0.009 |
| Output_Node_2 | 0.119 | 0.000 | 0.000 | 0.035 | 0.000 | 0.132 | 0.006 | 0.188 | 0.004 | 0.000 | 0.211 | 0.127 | 0.391000 | 0.061 | 0.007 | 0.000 |
| Output_Node_3 | 0.276 | 0.000 | 0.004 | 0.028 | 0.001 | 0.377 | 0.003 | 0.402 | 0.003 | 0.001 | 0.309 | 0.346 | 0.442000 | 0.000 | 0.003 | 0.000 |
| Output_Node_4 | 0.001 | 0.237 | 0.677 | 0.000 | 0.237 | 0.000 | 0.000 | 0.000 | 0.000 | 0.837 | 0.000 | 0.000 | 0.000000 | 0.000 | 0.000 | 0.001 |
| Output_Node_5 | 0.111 | 0.034 | 0.054 | 0.002 | 0.006 | 0.185 | 0.000 | 0.105 | 0.000 | 0.029 | 0.044 | 0.166 | 0.021000 | 0.000 | 0.000 | 0.000 |
| Output_Node_6 | 0.107 | 0.000 | 0.000 | 0.090 | 0.000 | 0.066 | 0.023 | 0.098 | 0.015 | 0.000 | 0.180 | 0.076 | 0.105000 | 0.939 | 0.023 | 0.000 |
| Output_Node_7 | 0.000 | 0.000 | 0.005 | 0.004 | 0.104 | 0.000 | 0.003 | 0.000 | 0.045 | 0.000 | 0.000 | 0.000 | 0.000000 | 0.000 | 0.006 | 0.872 |
| Output_Node_8 | 0.381 | 0.001 | 0.032 | 0.080 | 0.009 | 0.238 | 0.007 | 0.206 | 0.016 | 0.006 | 0.255 | 0.283 | 0.041000 | 0.000 | 0.010 | 0.000 |
| Output_Node_9 | 0.000 | 0.000 | 0.213 | 0.001 | 0.642 | 0.000 | 0.000 | 0.000 | 0.002 | 0.085 | 0.000 | 0.000 | 0.000000 | 0.000 | 0.000 | 0.117 |
| model2_prediction | 8.000 | 0.000 | 4.000 | 1.000 | 9.000 | 3.000 | 1.000 | 3.000 | 1.000 | 4.000 | 3.000 | 3.000 | 3.000000 | 6.000 | 1.000 | 7.000 |
# Flag each sample as correctly classified (1) or not (0), then compute
# per-class accuracy and, for each true class, the most common wrong
# prediction.
activation_df2['accurate'] = np.where(activation_df2['actual_class'] == activation_df2['model2_prediction'], 1, 0)
misclassified = activation_df2[activation_df2['actual_class'] != activation_df2['model2_prediction']]
# Mode of the wrong predictions per true class (value_counts sorts by
# frequency, so index[0] is the most frequent).
class_error2 = misclassified.groupby(['actual_class'], as_index=False)[['model2_prediction']].agg(lambda s: s.value_counts().index[0])
class_error2 = class_error2.rename(columns={'model2_prediction': 'most_common_error'})
class_accuracy2 = activation_df2.groupby(['actual_class'], as_index=False)[['accurate']].mean()
class_accuracy2 = pd.merge(class_accuracy2, class_error2)
class_accuracy2
| actual_class | accurate | most_common_error | |
|---|---|---|---|
| 0 | 0 | 0.814621 | 5 |
| 1 | 1 | 0.947197 | 8 |
| 2 | 2 | 0.529372 | 3 |
| 3 | 3 | 0.504649 | 8 |
| 4 | 4 | 0.741527 | 9 |
| 5 | 5 | 0.575171 | 8 |
| 6 | 6 | 0.663231 | 2 |
| 7 | 7 | 0.802235 | 9 |
| 8 | 8 | 0.580072 | 3 |
| 9 | 9 | 0.683308 | 7 |
| actual_class | variable | Node | |
|---|---|---|---|
| 0 | 5 | Node1 | 11.012856 |
| 1 | 0 | Node1 | 1.412916 |
| 2 | 4 | Node1 | 1.299680 |
| 3 | 1 | Node1 | 9.181105 |
| 4 | 9 | Node1 | 0.000000 |
| ... | ... | ... | ... |
| 119995 | 8 | Node2 | 6.021877 |
| 119996 | 3 | Node2 | 12.952602 |
| 119997 | 5 | Node2 | 5.669095 |
| 119998 | 6 | Node2 | 0.000000 |
| 119999 | 8 | Node2 | 5.690435 |
120000 rows × 3 columns
import seaborn as sns
from matplotlib.pyplot import figure
# Boxplot of the two hidden-node activations, grouped by true class; the frame
# is melted to long form so 'variable' distinguishes Node1 from Node2.
melted_activations = pd.melt(activation_df2, id_vars=['actual_class'], value_vars=['Node1', 'Node2'], value_name='Node')
fig, axes = plt.subplots(1, 1)
sns.set(rc={'figure.figsize': (30, 20)})
sns.boxplot(y='Node', x='actual_class', data=melted_activations, hue='variable', width=0.5, palette="colorblind")
plt.show()
# Scatter the two hidden-node activations against each other, coloured by the
# true class, to see how separable the classes are in this 2-D space.
plt.figure(figsize=(20, 12))
scatter_kwargs = dict(
    x="Node1",
    y="Node2",
    hue="actual_class",
    palette=sns.color_palette("hls", 10),
    data=activation_df2,
    legend="full",
    alpha=0.3,
)
sns.scatterplot(**scatter_kwargs);
# A wider network: one 256-unit ReLU hidden layer followed by a 10-way softmax
# output, compiled with the same optimizer/loss/metrics as the earlier models.
best_model = Sequential([
    Dense(256, activation='relu', input_shape=[784]),
    Dense(10, activation='softmax'),
])
best_model.summary()
best_model.compile(optimizer='rmsprop', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
Model: "sequential_1" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_2 (Dense) (None, 256) 200960 _________________________________________________________________ dense_3 (Dense) (None, 10) 2570 ================================================================= Total params: 203,530 Trainable params: 203,530 Non-trainable params: 0 _________________________________________________________________
# Train for a full 150 epochs with no early stopping; the log pasted below
# shows val_loss rising from ~epoch 5 onward while training loss keeps falling
# toward zero (overfitting) — consider an EarlyStopping callback.
best_history=best_model.fit(X_train, y_train, batch_size=32, validation_data=(X_test, y_test), epochs=150)
Epoch 1/150 1875/1875 [==============================] - 6s 3ms/step - loss: 0.3619 - accuracy: 0.8959 - val_loss: 0.1191 - val_accuracy: 0.9631 Epoch 2/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.1041 - accuracy: 0.9698 - val_loss: 0.0995 - val_accuracy: 0.9698 Epoch 3/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0725 - accuracy: 0.9783 - val_loss: 0.0935 - val_accuracy: 0.9747 Epoch 4/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0593 - accuracy: 0.9841 - val_loss: 0.0790 - val_accuracy: 0.9784 Epoch 5/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0449 - accuracy: 0.9876 - val_loss: 0.0899 - val_accuracy: 0.9771 Epoch 6/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0389 - accuracy: 0.9896 - val_loss: 0.0901 - val_accuracy: 0.9778 Epoch 7/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0309 - accuracy: 0.9916 - val_loss: 0.0928 - val_accuracy: 0.9786 Epoch 8/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0244 - accuracy: 0.9931 - val_loss: 0.0916 - val_accuracy: 0.9798 Epoch 9/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0210 - accuracy: 0.9939 - val_loss: 0.0959 - val_accuracy: 0.9803 Epoch 10/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0187 - accuracy: 0.9949 - val_loss: 0.1118 - val_accuracy: 0.9763 Epoch 11/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0172 - accuracy: 0.9959 - val_loss: 0.1031 - val_accuracy: 0.9793 Epoch 12/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0138 - accuracy: 0.9964 - val_loss: 0.1025 - val_accuracy: 0.9794 Epoch 13/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0111 - accuracy: 0.9969 - val_loss: 0.1078 - val_accuracy: 0.9802 Epoch 14/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0096 - accuracy: 0.9972 - 
val_loss: 0.1180 - val_accuracy: 0.9785 Epoch 15/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0085 - accuracy: 0.9978 - val_loss: 0.1159 - val_accuracy: 0.9799 Epoch 16/150 1875/1875 [==============================] - 6s 3ms/step - loss: 0.0077 - accuracy: 0.9982 - val_loss: 0.1149 - val_accuracy: 0.9814 Epoch 17/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0064 - accuracy: 0.9986 - val_loss: 0.1205 - val_accuracy: 0.9795 Epoch 18/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0054 - accuracy: 0.9987 - val_loss: 0.1304 - val_accuracy: 0.9796 Epoch 19/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0039 - accuracy: 0.9990 - val_loss: 0.1283 - val_accuracy: 0.9798 Epoch 20/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0038 - accuracy: 0.9989 - val_loss: 0.1277 - val_accuracy: 0.9800 Epoch 21/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0044 - accuracy: 0.9989 - val_loss: 0.1407 - val_accuracy: 0.9803 Epoch 22/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0034 - accuracy: 0.9992 - val_loss: 0.1475 - val_accuracy: 0.9790 Epoch 23/150 1875/1875 [==============================] - 6s 3ms/step - loss: 0.0020 - accuracy: 0.9994 - val_loss: 0.1480 - val_accuracy: 0.9803 Epoch 24/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0018 - accuracy: 0.9996 - val_loss: 0.1691 - val_accuracy: 0.9782 Epoch 25/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0017 - accuracy: 0.9994 - val_loss: 0.1521 - val_accuracy: 0.9793 Epoch 26/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0011 - accuracy: 0.9997 - val_loss: 0.1668 - val_accuracy: 0.9787 Epoch 27/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0015 - accuracy: 0.9997 - val_loss: 0.1690 - val_accuracy: 0.9798 Epoch 28/150 1875/1875 [==============================] - 5s 
3ms/step - loss: 0.0015 - accuracy: 0.9996 - val_loss: 0.1663 - val_accuracy: 0.9791 Epoch 29/150 1875/1875 [==============================] - 5s 3ms/step - loss: 0.0011 - accuracy: 0.9997 - val_loss: 0.1743 - val_accuracy: 0.9786 Epoch 30/150 1875/1875 [==============================] - 5s 3ms/step - loss: 7.6759e-04 - accuracy: 0.9998 - val_loss: 0.1746 - val_accuracy: 0.9791 Epoch 31/150 1875/1875 [==============================] - 5s 3ms/step - loss: 8.6628e-04 - accuracy: 0.9998 - val_loss: 0.1725 - val_accuracy: 0.9798 Epoch 32/150 1875/1875 [==============================] - 5s 3ms/step - loss: 7.5875e-04 - accuracy: 0.9998 - val_loss: 0.1797 - val_accuracy: 0.9789 Epoch 33/150 1875/1875 [==============================] - 5s 3ms/step - loss: 6.8208e-04 - accuracy: 0.9997 - val_loss: 0.1924 - val_accuracy: 0.9784 Epoch 34/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.1343e-04 - accuracy: 0.9999 - val_loss: 0.1780 - val_accuracy: 0.9795 Epoch 35/150 1875/1875 [==============================] - 5s 3ms/step - loss: 5.1087e-04 - accuracy: 0.9998 - val_loss: 0.1933 - val_accuracy: 0.9790 Epoch 36/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.1054e-04 - accuracy: 0.9999 - val_loss: 0.1944 - val_accuracy: 0.9796 Epoch 37/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.8723e-04 - accuracy: 0.9999 - val_loss: 0.1894 - val_accuracy: 0.9787 Epoch 38/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.1176e-04 - accuracy: 0.9998 - val_loss: 0.2010 - val_accuracy: 0.9798 Epoch 39/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.1288e-04 - accuracy: 1.0000 - val_loss: 0.1984 - val_accuracy: 0.9801 Epoch 40/150 1875/1875 [==============================] - 5s 3ms/step - loss: 5.2422e-05 - accuracy: 1.0000 - val_loss: 0.2016 - val_accuracy: 0.9802 Epoch 41/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.8096e-05 - accuracy: 1.0000 - 
val_loss: 0.2060 - val_accuracy: 0.9802 Epoch 42/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.8123e-05 - accuracy: 1.0000 - val_loss: 0.2059 - val_accuracy: 0.9799 Epoch 43/150 1875/1875 [==============================] - 5s 3ms/step - loss: 3.1603e-05 - accuracy: 1.0000 - val_loss: 0.2134 - val_accuracy: 0.9794 Epoch 44/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.8620e-06 - accuracy: 1.0000 - val_loss: 0.2138 - val_accuracy: 0.9799 Epoch 45/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.7326e-05 - accuracy: 1.0000 - val_loss: 0.2139 - val_accuracy: 0.9810 Epoch 46/150 1875/1875 [==============================] - 5s 3ms/step - loss: 6.5681e-06 - accuracy: 1.0000 - val_loss: 0.2135 - val_accuracy: 0.9796 Epoch 47/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.4785e-05 - accuracy: 1.0000 - val_loss: 0.2183 - val_accuracy: 0.9798 Epoch 48/150 1875/1875 [==============================] - 5s 3ms/step - loss: 6.0739e-07 - accuracy: 1.0000 - val_loss: 0.2271 - val_accuracy: 0.9795 Epoch 49/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.7737e-06 - accuracy: 1.0000 - val_loss: 0.2284 - val_accuracy: 0.9801 Epoch 50/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.3454e-07 - accuracy: 1.0000 - val_loss: 0.2170 - val_accuracy: 0.9805 Epoch 51/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.7751e-07 - accuracy: 1.0000 - val_loss: 0.2257 - val_accuracy: 0.9807 Epoch 52/150 1875/1875 [==============================] - 5s 3ms/step - loss: 3.4758e-08 - accuracy: 1.0000 - val_loss: 0.2278 - val_accuracy: 0.9805 Epoch 53/150 1875/1875 [==============================] - 5s 3ms/step - loss: 5.9608e-08 - accuracy: 1.0000 - val_loss: 0.2253 - val_accuracy: 0.9807 Epoch 54/150 1875/1875 [==============================] - 5s 3ms/step - loss: 9.8418e-09 - accuracy: 1.0000 - val_loss: 0.2278 - val_accuracy: 0.9804 Epoch 
55/150 1875/1875 [==============================] - 5s 3ms/step - loss: 3.8058e-08 - accuracy: 1.0000 - val_loss: 0.2287 - val_accuracy: 0.9806 Epoch 56/150 1875/1875 [==============================] - 5s 3ms/step - loss: 5.1290e-09 - accuracy: 1.0000 - val_loss: 0.2315 - val_accuracy: 0.9800 Epoch 57/150 1875/1875 [==============================] - 5s 3ms/step - loss: 7.4555e-09 - accuracy: 1.0000 - val_loss: 0.2331 - val_accuracy: 0.9803 Epoch 58/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.8264e-09 - accuracy: 1.0000 - val_loss: 0.2324 - val_accuracy: 0.9804 Epoch 59/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.7078e-09 - accuracy: 1.0000 - val_loss: 0.2329 - val_accuracy: 0.9803 Epoch 60/150 1875/1875 [==============================] - 5s 3ms/step - loss: 5.8717e-09 - accuracy: 1.0000 - val_loss: 0.2346 - val_accuracy: 0.9807 Epoch 61/150 1875/1875 [==============================] - 6s 3ms/step - loss: 5.1350e-09 - accuracy: 1.0000 - val_loss: 0.2363 - val_accuracy: 0.9805 Epoch 62/150 1875/1875 [==============================] - 6s 3ms/step - loss: 4.6230e-09 - accuracy: 1.0000 - val_loss: 0.2394 - val_accuracy: 0.9799 Epoch 63/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.4913e-09 - accuracy: 1.0000 - val_loss: 0.2393 - val_accuracy: 0.9800 Epoch 64/150 1875/1875 [==============================] - 5s 3ms/step - loss: 6.4006e-09 - accuracy: 1.0000 - val_loss: 0.2394 - val_accuracy: 0.9799 Epoch 65/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.3084e-09 - accuracy: 1.0000 - val_loss: 0.2410 - val_accuracy: 0.9795 Epoch 66/150 1875/1875 [==============================] - 5s 3ms/step - loss: 5.3707e-09 - accuracy: 1.0000 - val_loss: 0.2408 - val_accuracy: 0.9796 Epoch 67/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.8134e-09 - accuracy: 1.0000 - val_loss: 0.2438 - val_accuracy: 0.9799 Epoch 68/150 1875/1875 [==============================] 
- 5s 3ms/step - loss: 5.2280e-09 - accuracy: 1.0000 - val_loss: 0.2450 - val_accuracy: 0.9793 Epoch 69/150 1875/1875 [==============================] - 5s 3ms/step - loss: 5.5881e-09 - accuracy: 1.0000 - val_loss: 0.2418 - val_accuracy: 0.9804 Epoch 70/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.2290e-09 - accuracy: 1.0000 - val_loss: 0.2425 - val_accuracy: 0.9796 Epoch 71/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.4696e-09 - accuracy: 1.0000 - val_loss: 0.2465 - val_accuracy: 0.9801 Epoch 72/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.7615e-09 - accuracy: 1.0000 - val_loss: 0.2457 - val_accuracy: 0.9798 Epoch 73/150 1875/1875 [==============================] - 5s 3ms/step - loss: 5.5222e-09 - accuracy: 1.0000 - val_loss: 0.2480 - val_accuracy: 0.9800 Epoch 74/150 1875/1875 [==============================] - 5s 3ms/step - loss: 5.9630e-09 - accuracy: 1.0000 - val_loss: 0.2455 - val_accuracy: 0.9797 Epoch 75/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.4489e-08 - accuracy: 1.0000 - val_loss: 0.2442 - val_accuracy: 0.9795 Epoch 76/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.7008e-09 - accuracy: 1.0000 - val_loss: 0.2487 - val_accuracy: 0.9799 Epoch 77/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.8640e-09 - accuracy: 1.0000 - val_loss: 0.2494 - val_accuracy: 0.9794 Epoch 78/150 1875/1875 [==============================] - 5s 3ms/step - loss: 5.1259e-09 - accuracy: 1.0000 - val_loss: 0.2491 - val_accuracy: 0.9795 Epoch 79/150 1875/1875 [==============================] - 5s 3ms/step - loss: 8.7429e-09 - accuracy: 1.0000 - val_loss: 0.2501 - val_accuracy: 0.9795 Epoch 80/150 1875/1875 [==============================] - 5s 3ms/step - loss: 8.4172e-09 - accuracy: 1.0000 - val_loss: 0.2494 - val_accuracy: 0.9798 Epoch 81/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.2893e-09 - accuracy: 
1.0000 - val_loss: 0.2518 - val_accuracy: 0.9796 Epoch 82/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.6618e-08 - accuracy: 1.0000 - val_loss: 0.2526 - val_accuracy: 0.9803 Epoch 83/150 1875/1875 [==============================] - 5s 3ms/step - loss: 5.5592e-09 - accuracy: 1.0000 - val_loss: 0.2522 - val_accuracy: 0.9803 Epoch 84/150 1875/1875 [==============================] - 5s 3ms/step - loss: 5.1182e-09 - accuracy: 1.0000 - val_loss: 0.2538 - val_accuracy: 0.9795 Epoch 85/150 1875/1875 [==============================] - 5s 3ms/step - loss: 5.1560e-09 - accuracy: 1.0000 - val_loss: 0.2507 - val_accuracy: 0.9800 Epoch 86/150 1875/1875 [==============================] - 5s 3ms/step - loss: 7.0130e-08 - accuracy: 1.0000 - val_loss: 0.2571 - val_accuracy: 0.9794 Epoch 87/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.9836e-06 - accuracy: 1.0000 - val_loss: 0.2546 - val_accuracy: 0.9787 Epoch 88/150 1875/1875 [==============================] - 5s 3ms/step - loss: 6.9957e-05 - accuracy: 1.0000 - val_loss: 0.2548 - val_accuracy: 0.9793 Epoch 89/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.6032e-05 - accuracy: 1.0000 - val_loss: 0.2542 - val_accuracy: 0.9786 Epoch 90/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.2449e-07 - accuracy: 1.0000 - val_loss: 0.2454 - val_accuracy: 0.9799 Epoch 91/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.0630e-08 - accuracy: 1.0000 - val_loss: 0.2465 - val_accuracy: 0.9803 Epoch 92/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.5139e-09 - accuracy: 1.0000 - val_loss: 0.2485 - val_accuracy: 0.9798 Epoch 93/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.0173e-09 - accuracy: 1.0000 - val_loss: 0.2515 - val_accuracy: 0.9800 Epoch 94/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.2652e-09 - accuracy: 1.0000 - val_loss: 0.2520 - val_accuracy: 0.9798 
Epoch 95/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.3967e-09 - accuracy: 1.0000 - val_loss: 0.2560 - val_accuracy: 0.9794 Epoch 96/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.7932e-09 - accuracy: 1.0000 - val_loss: 0.2543 - val_accuracy: 0.9796 Epoch 97/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.1413e-08 - accuracy: 1.0000 - val_loss: 0.2537 - val_accuracy: 0.9794 Epoch 98/150 1875/1875 [==============================] - 5s 3ms/step - loss: 3.6934e-09 - accuracy: 1.0000 - val_loss: 0.2574 - val_accuracy: 0.9791 Epoch 99/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.5193e-08 - accuracy: 1.0000 - val_loss: 0.2712 - val_accuracy: 0.9789 Epoch 100/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.7909e-07 - accuracy: 1.0000 - val_loss: 0.2576 - val_accuracy: 0.9797 Epoch 101/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.0758e-07 - accuracy: 1.0000 - val_loss: 0.2549 - val_accuracy: 0.9790 Epoch 102/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.8433e-09 - accuracy: 1.0000 - val_loss: 0.2558 - val_accuracy: 0.9797 Epoch 103/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.4968e-09 - accuracy: 1.0000 - val_loss: 0.2600 - val_accuracy: 0.9794 Epoch 104/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.0996e-08 - accuracy: 1.0000 - val_loss: 0.2573 - val_accuracy: 0.9790 Epoch 105/150 1875/1875 [==============================] - 5s 3ms/step - loss: 3.1315e-09 - accuracy: 1.0000 - val_loss: 0.2616 - val_accuracy: 0.9793 Epoch 106/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.4052e-09 - accuracy: 1.0000 - val_loss: 0.2596 - val_accuracy: 0.9791 Epoch 107/150 1875/1875 [==============================] - 5s 3ms/step - loss: 3.5948e-09 - accuracy: 1.0000 - val_loss: 0.2631 - val_accuracy: 0.9789 Epoch 108/150 1875/1875 
[==============================] - 5s 3ms/step - loss: 6.3246e-09 - accuracy: 1.0000 - val_loss: 0.2651 - val_accuracy: 0.9793 Epoch 109/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.1970e-08 - accuracy: 1.0000 - val_loss: 0.2617 - val_accuracy: 0.9793 Epoch 110/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.2467e-06 - accuracy: 1.0000 - val_loss: 0.2633 - val_accuracy: 0.9792 Epoch 111/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.4510e-06 - accuracy: 1.0000 - val_loss: 0.2580 - val_accuracy: 0.9789 Epoch 112/150 1875/1875 [==============================] - 5s 3ms/step - loss: 3.5978e-06 - accuracy: 1.0000 - val_loss: 0.2609 - val_accuracy: 0.9797 Epoch 113/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.6755e-05 - accuracy: 1.0000 - val_loss: 0.2527 - val_accuracy: 0.9783 Epoch 114/150 1875/1875 [==============================] - 5s 3ms/step - loss: 6.7312e-06 - accuracy: 1.0000 - val_loss: 0.2612 - val_accuracy: 0.9796 Epoch 115/150 1875/1875 [==============================] - 5s 3ms/step - loss: 5.9101e-08 - accuracy: 1.0000 - val_loss: 0.2551 - val_accuracy: 0.9800 Epoch 116/150 1875/1875 [==============================] - 5s 3ms/step - loss: 3.8170e-09 - accuracy: 1.0000 - val_loss: 0.2560 - val_accuracy: 0.9799 Epoch 117/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.4778e-09 - accuracy: 1.0000 - val_loss: 0.2607 - val_accuracy: 0.9794 Epoch 118/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.8497e-09 - accuracy: 1.0000 - val_loss: 0.2589 - val_accuracy: 0.9797 Epoch 119/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.0020e-09 - accuracy: 1.0000 - val_loss: 0.2594 - val_accuracy: 0.9800 Epoch 120/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.5827e-09 - accuracy: 1.0000 - val_loss: 0.2659 - val_accuracy: 0.9793 Epoch 121/150 1875/1875 [==============================] - 
5s 3ms/step - loss: 3.4611e-09 - accuracy: 1.0000 - val_loss: 0.2666 - val_accuracy: 0.9794 Epoch 122/150 1875/1875 [==============================] - 5s 3ms/step - loss: 6.2423e-09 - accuracy: 1.0000 - val_loss: 0.2672 - val_accuracy: 0.9795 Epoch 123/150 1875/1875 [==============================] - 5s 3ms/step - loss: 3.1718e-09 - accuracy: 1.0000 - val_loss: 0.2671 - val_accuracy: 0.9794 Epoch 124/150 1875/1875 [==============================] - 5s 3ms/step - loss: 3.7216e-09 - accuracy: 1.0000 - val_loss: 0.2705 - val_accuracy: 0.9789 Epoch 125/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.3014e-05 - accuracy: 1.0000 - val_loss: 0.2618 - val_accuracy: 0.9793 Epoch 126/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.3181e-04 - accuracy: 1.0000 - val_loss: 0.2721 - val_accuracy: 0.9782 Epoch 127/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.8196e-05 - accuracy: 1.0000 - val_loss: 0.2658 - val_accuracy: 0.9787 Epoch 128/150 1875/1875 [==============================] - 5s 3ms/step - loss: 5.1060e-07 - accuracy: 1.0000 - val_loss: 0.2714 - val_accuracy: 0.9787 Epoch 129/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.0962e-08 - accuracy: 1.0000 - val_loss: 0.2578 - val_accuracy: 0.9798 Epoch 130/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.7811e-08 - accuracy: 1.0000 - val_loss: 0.2571 - val_accuracy: 0.9804 Epoch 131/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.7811e-09 - accuracy: 1.0000 - val_loss: 0.2584 - val_accuracy: 0.9797 Epoch 132/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.9312e-09 - accuracy: 1.0000 - val_loss: 0.2617 - val_accuracy: 0.9801 Epoch 133/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.2916e-09 - accuracy: 1.0000 - val_loss: 0.2657 - val_accuracy: 0.9801 Epoch 134/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.6446e-09 - 
accuracy: 1.0000 - val_loss: 0.2697 - val_accuracy: 0.9799 Epoch 135/150 1875/1875 [==============================] - 5s 3ms/step - loss: 3.2194e-09 - accuracy: 1.0000 - val_loss: 0.2690 - val_accuracy: 0.9790 Epoch 136/150 1875/1875 [==============================] - 5s 3ms/step - loss: 3.0605e-09 - accuracy: 1.0000 - val_loss: 0.2699 - val_accuracy: 0.9800 Epoch 137/150 1875/1875 [==============================] - 5s 3ms/step - loss: 5.7348e-07 - accuracy: 1.0000 - val_loss: 0.2746 - val_accuracy: 0.9792 Epoch 138/150 1875/1875 [==============================] - 5s 3ms/step - loss: 3.9092e-06 - accuracy: 1.0000 - val_loss: 0.2667 - val_accuracy: 0.9781 Epoch 139/150 1875/1875 [==============================] - 5s 3ms/step - loss: 3.4375e-08 - accuracy: 1.0000 - val_loss: 0.2636 - val_accuracy: 0.9788 Epoch 140/150 1875/1875 [==============================] - 5s 3ms/step - loss: 7.9967e-09 - accuracy: 1.0000 - val_loss: 0.2624 - val_accuracy: 0.9798 Epoch 141/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.1835e-09 - accuracy: 1.0000 - val_loss: 0.2675 - val_accuracy: 0.9794 Epoch 142/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.8554e-09 - accuracy: 1.0000 - val_loss: 0.2705 - val_accuracy: 0.9796 Epoch 143/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.8850e-09 - accuracy: 1.0000 - val_loss: 0.2727 - val_accuracy: 0.9799 Epoch 144/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.9333e-09 - accuracy: 1.0000 - val_loss: 0.2713 - val_accuracy: 0.9793 Epoch 145/150 1875/1875 [==============================] - 5s 3ms/step - loss: 2.7580e-09 - accuracy: 1.0000 - val_loss: 0.2784 - val_accuracy: 0.9792 Epoch 146/150 1875/1875 [==============================] - 5s 3ms/step - loss: 6.2706e-08 - accuracy: 1.0000 - val_loss: 0.2778 - val_accuracy: 0.9795 Epoch 147/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.1352e-04 - accuracy: 1.0000 - val_loss: 0.2736 
- val_accuracy: 0.9796 Epoch 148/150 1875/1875 [==============================] - 5s 3ms/step - loss: 3.8087e-06 - accuracy: 1.0000 - val_loss: 0.2737 - val_accuracy: 0.9780 Epoch 149/150 1875/1875 [==============================] - 5s 3ms/step - loss: 1.3254e-05 - accuracy: 1.0000 - val_loss: 0.2758 - val_accuracy: 0.9790 Epoch 150/150 1875/1875 [==============================] - 5s 3ms/step - loss: 4.5937e-07 - accuracy: 1.0000 - val_loss: 0.2684 - val_accuracy: 0.9800
from matplotlib.pyplot import figure

# Plot every metric recorded during training of the best model
# (loss/accuracy and their validation counterparts) on one large chart.
plt.figure(figsize=(15, 15))
best_history_df = pd.DataFrame(best_history.history)
for metric in best_history_df.columns:
    plt.plot(best_history_df[metric], linewidth=5)
plt.legend(best_history_df.columns.to_list())
plt.show()
# Predict test-set classes with the trained model.
# `Sequential.predict_classes` was deprecated and removed after TF 2.5
# (the runtime warning below says as much); the documented replacement
# for a softmax classifier is argmax over the predicted probabilities.
y_pred_best = np.argmax(best_model.predict(X_test), axis=-1)

# Show the first 16 test digits, labelled with the model's prediction.
plt.figure(figsize=(15, 15))
for i in range(16):
    plt.subplot(4, 4, i + 1)
    # Undo the /255 normalisation purely for display.
    plt.imshow(X_test[i].reshape(28, 28) * 255, cmap=plt.get_cmap('gray'))
    plt.xlabel('pred={}'.format(y_pred_best[i]), fontsize=15)
plt.show()
/usr/local/lib/python3.7/dist-packages/keras/engine/sequential.py:450: UserWarning: `model.predict_classes()` is deprecated and will be removed after 2021-01-01. Please use instead:* `np.argmax(model.predict(x), axis=-1)`, if your model does multi-class classification (e.g. if it uses a `softmax` last-layer activation).* `(model.predict(x) > 0.5).astype("int32")`, if your model does binary classification (e.g. if it uses a `sigmoid` last-layer activation).
warnings.warn('`model.predict_classes()` is deprecated and '
# Probe model: shares the best model's input but emits every layer's
# output, so per-layer activations can be inspected directly.
layer_outputs = [current_layer.output for current_layer in best_model.layers]
activation_model_best = models.Model(inputs=best_model.input, outputs=layer_outputs)
print(f"There are {len(layer_outputs)} layers")
There are 2 layers
# Run the probe model over the whole training set and split the result
# per layer: index 0 = hidden dense layer, index 1 = softmax output.
activations_best = activation_model_best.predict(X_train)
hidden_layer_activation_best, output_layer_activations_best = (
    activations_best[0],
    activations_best[1],
)
print(hidden_layer_activation_best.shape)
print(f"The maximum activation value of the hidden nodes in the hidden layer is {hidden_layer_activation_best.max()}")
(60000, 256) The maximum activation value of the hidden nodes in the hidden layer is 78.06275177001953
# Training-set predictions via argmax over the softmax probabilities;
# replaces the removed `Sequential.predict_classes` API (see the
# deprecation warning emitted below).
y_train_pred_best = np.argmax(best_model.predict(X_train), axis=-1)

# One row per training image: true class, all 256 hidden-node
# activations, all 10 output-node probabilities, and the prediction.
activation_data_best = {'actual_class': y_train}
for i in range(0, 256):
    activation_data_best["Node{}".format(i)] = hidden_layer_activation_best[:, i]
activation_df_best = pd.DataFrame(activation_data_best)
for i in range(0, 10):
    activation_df_best['Output_Node_{}'.format(i)] = output_layer_activations_best[:, i]
activation_df_best['best_model_prediction'] = y_train_pred_best

# Show the first 16 training digits, then the matching activation
# columns (transposed so each image is one column of the table).
plt.figure(figsize=(15, 15))
for i in range(0, 16):
    plt.subplot(4, 4, i + 1)
    plt.imshow(X_train[i].reshape(28, 28) * 255, cmap=plt.get_cmap('gray'))
    plt.xlabel('pred={}'.format(y_train_pred_best[i]), fontsize=15)
display(activation_df_best.round(3).T.iloc[:, 0:16])
plt.show()
/usr/local/lib/python3.7/dist-packages/keras/engine/sequential.py:450: UserWarning: `model.predict_classes()` is deprecated and will be removed after 2021-01-01. Please use instead:* `np.argmax(model.predict(x), axis=-1)`, if your model does multi-class classification (e.g. if it uses a `softmax` last-layer activation).* `(model.predict(x) > 0.5).astype("int32")`, if your model does binary classification (e.g. if it uses a `sigmoid` last-layer activation).
warnings.warn('`model.predict_classes()` is deprecated and '
| 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| actual_class | 5.000 | 0.000 | 4.000 | 1.000 | 9.000 | 2.000000 | 1.000 | 3.000000 | 1.000 | 4.000 | 3.000000 | 5.000 | 3.000 | 6.000 | 1.000 | 7.000000 |
| Node0 | 1.701 | 0.446 | 0.000 | 0.000 | 3.121 | 0.000000 | 0.000 | 0.000000 | 0.000 | 0.000 | 0.000000 | 0.000 | 4.649 | 0.000 | 0.000 | 5.315000 |
| Node1 | 3.228 | 0.000 | 0.000 | 2.498 | 0.000 | 0.000000 | 0.000 | 1.441000 | 0.000 | 0.000 | 0.000000 | 0.000 | 0.000 | 0.000 | 0.000 | 0.000000 |
| Node2 | 17.004 | 18.525 | 10.928 | 13.751 | 14.751 | 17.051001 | 10.773 | 27.714001 | 7.985 | 21.621 | 23.124001 | 8.664 | 22.066 | 17.362 | 7.313 | 16.438999 |
| Node3 | 0.000 | 2.580 | 0.000 | 4.281 | 0.565 | 0.000000 | 0.000 | 0.000000 | 0.055 | 1.301 | 0.000000 | 3.814 | 0.000 | 0.000 | 0.374 | 5.216000 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| Output_Node_6 | 0.000 | 0.000 | 0.000 | 0.000 | 0.000 | 0.000000 | 0.000 | 0.000000 | 0.000 | 0.000 | 0.000000 | 0.000 | 0.000 | 1.000 | 0.000 | 0.000000 |
| Output_Node_7 | 0.000 | 0.000 | 0.000 | 0.000 | 0.000 | 0.000000 | 0.000 | 0.000000 | 0.000 | 0.000 | 0.000000 | 0.000 | 0.000 | 0.000 | 0.000 | 1.000000 |
| Output_Node_8 | 0.000 | 0.000 | 0.000 | 0.000 | 0.000 | 0.000000 | 0.000 | 0.000000 | 0.000 | 0.000 | 0.000000 | 0.000 | 0.000 | 0.000 | 0.000 | 0.000000 |
| Output_Node_9 | 0.000 | 0.000 | 0.000 | 0.000 | 1.000 | 0.000000 | 0.000 | 0.000000 | 0.000 | 0.000 | 0.000000 | 0.000 | 0.000 | 0.000 | 0.000 | 0.000000 |
| best_model_prediction | 5.000 | 0.000 | 4.000 | 1.000 | 9.000 | 2.000000 | 1.000 | 3.000000 | 1.000 | 4.000 | 3.000000 | 5.000 | 3.000 | 6.000 | 1.000 | 7.000000 |
268 rows × 16 columns
# Mark each row 1/0 for correct/incorrect prediction.
activation_df_best['accurate'] = np.where(
    activation_df_best['actual_class'] == activation_df_best['best_model_prediction'], 1, 0)

# Among the misclassified rows only, find each class's most frequent
# wrong prediction (value_counts is sorted descending, so index[0] is
# the modal error).
misclassified = activation_df_best[
    activation_df_best['actual_class'] != activation_df_best['best_model_prediction']]
class_error_best = (
    misclassified
    .groupby(['actual_class'], as_index=False)[['best_model_prediction']]
    .agg(lambda col: col.value_counts().index[0]))
class_error_best['most_common_error'] = class_error_best['best_model_prediction']
del class_error_best['best_model_prediction']

# Per-class accuracy (mean of the 0/1 flag), joined with the modal
# error; the inner merge keeps only classes that had any errors.
class_accuracy_best = activation_df_best.groupby(['actual_class'], as_index=False)[['accurate']].mean()
class_accuracy_best = pd.merge(class_accuracy_best, class_error_best)
class_accuracy_best
| actual_class | accurate | most_common_error | |
|---|---|---|---|
| 0 | 8 | 0.999829 | 7 |
# Only classes with at least one misclassification appear above (the merge is inner); every other class had no errors.
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA
from sklearn.manifold import TSNE
from sklearn.ensemble import RandomForestClassifier
# Project the 256-dimensional hidden-layer activations down to 3
# principal components to show that the classes cluster when the
# network is trained with a larger hidden layer.
pca = PCA(n_components=3)
components = pca.fit_transform(hidden_layer_activation_best)
pca_df = pd.DataFrame(data=components, columns=['pc1', 'pc2', 'pc3'])
pca_df['actual_class'] = y_train
pca_df
| pc1 | pc2 | pc3 | actual_class | |
|---|---|---|---|---|
| 0 | 5.026535 | -10.703937 | -14.826284 | 5 |
| 1 | 5.325309 | 28.089903 | -19.393623 | 0 |
| 2 | -39.432232 | 24.261507 | 5.702179 | 4 |
| 3 | -14.907855 | -15.436034 | -15.573255 | 1 |
| 4 | 2.529650 | -9.099765 | 23.015396 | 9 |
| ... | ... | ... | ... | ... |
| 59995 | 6.238678 | -13.512926 | -11.232577 | 8 |
| 59996 | 9.696705 | -8.564385 | -26.566299 | 3 |
| 59997 | -13.173613 | -8.744769 | 2.145115 | 5 |
| 59998 | -30.740461 | 8.808605 | -18.175611 | 6 |
| 59999 | -18.124315 | -6.677984 | -6.135028 | 8 |
60000 rows × 4 columns
# Pairwise 2-D scatter plots of the three principal components,
# coloured by the true digit class.
fig, axes = plt.subplots(1, 3, figsize=(25, 10))
for panel, (x_col, y_col) in zip(axes, [("pc1", "pc2"), ("pc1", "pc3"), ("pc2", "pc3")]):
    sns.scatterplot(x=x_col, y=y_col, hue="actual_class",
                    palette=sns.color_palette("hls", 10), data=pca_df,
                    legend="full", alpha=0.3, ax=panel)
plt.show()
# 3-D scatter of the three principal components, coloured by class.
# `Figure.gca(projection='3d')` was deprecated in Matplotlib 3.4 and
# removed in 3.7 — create the 3-D axes explicitly instead.
ax = plt.figure(figsize=(16, 10)).add_subplot(projection='3d')
ax.scatter(
    xs=pca_df['pc1'],
    ys=pca_df['pc2'],
    zs=pca_df['pc3'],
    c=pca_df.loc[:, "actual_class"],
    cmap='tab10')
ax.set_xlabel('pca-one')
ax.set_ylabel('pca-two')
ax.set_zlabel('pca-three')
plt.show()
# further analysis
from matplotlib.transforms import offset_copy

# For each digit class: fit a 3-component PCA on ONLY that class's
# hidden-layer activations, then project the FULL training set into
# that class-specific basis. This shows how each class's dominant
# activation directions separate (or fail to separate) the others.
#
# The original code repeated this stanza verbatim ten times (pca0..pca9);
# the loop below produces the identical `pca_by_output_lst` result.
pca_by_output = pd.DataFrame(hidden_layer_activation_best)
pca_by_output['actual_class'] = y_train

pca_by_output_lst = []
for digit in range(10):
    # Rows belonging to this class, activation columns only.
    class_rows = pca_by_output[pca_by_output['actual_class'] == digit].reset_index(drop=True)
    del class_rows['actual_class']
    # Basis is learned from this class alone...
    pca = PCA(n_components=3)
    pca.fit(class_rows)
    # ...but every training sample is projected into it.
    projected = pca.transform(hidden_layer_activation_best)
    pca_by_output_lst.append(pd.DataFrame(data=projected, columns=['pc1', 'pc2', 'pc3']))
# 10x3 grid of scatter plots: one row per class-specific PCA basis,
# columns are the (pc1,pc2), (pc1,pc3), (pc2,pc3) projections.
fig, axes = plt.subplots(10, 3, figsize=(35, 60))
for output, frame in enumerate(pca_by_output_lst):
    frame['actual_class'] = y_train
    sns.scatterplot(x="pc1", y="pc2", hue="actual_class", palette=sns.color_palette("hls", 10), data=frame, legend="full", alpha=0.3, ax=axes[output, 0])
    sns.scatterplot(x="pc1", y="pc3", hue="actual_class", palette=sns.color_palette("hls", 10), data=frame, legend="full", alpha=0.3, ax=axes[output, 1])
    sns.scatterplot(x="pc2", y="pc3", hue="actual_class", palette=sns.color_palette("hls", 10), data=frame, legend="full", alpha=0.3, ax=axes[output, 2])

# Label each row with the class whose activations defined its basis.
# Fix: the original used range(0, 9), which yields only 9 labels for
# the 10 rows (zip silently dropped the last row's label); it also
# computed an unused `rows` list before the loop, removed here.
rows = ['{} Classification'.format(row) for row in range(10)]
for ax, row in zip(axes[:, 0], rows):
    ax.annotate(row, xy=(0, 0.5), xytext=(-ax.yaxis.labelpad - 5, 0), xycoords=ax.yaxis.label, textcoords='offset points', size='large', ha='right', va='center')
fig.tight_layout()
plt.show()